Neurons example, pt. 1

Generate some data

# Imports and plotting setup for the notebook.
import altair as alt
import numpy as np
from bayes_window import models, fake_spikes_explore, BayesWindow, BayesRegression, LMERegression
from bayes_window.generative_models import generate_fake_spikes

# Lift Altair's default 5000-row limit so the full fake-spike data can be plotted.
alt.data_transformers.disable_max_rows()
try:
    # Render charts as PNG when the optional altair_saver backend is installed;
    # fall back silently to the default renderer otherwise.
    alt.renderers.enable('altair_saver', fmts=['png'])
except Exception:
    pass
/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/experimental/optimizers.py:30: FutureWarning: jax.experimental.optimizers is deprecated, import jax.example_libraries.optimizers instead
  FutureWarning)
/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/experimental/stax.py:30: FutureWarning: jax.experimental.stax is deprecated, import jax.example_libraries.stax instead
  FutureWarning)
# Simulate spike data: 20 trials x 6 neurons x 3 mice, 5 s duration each.
# NOTE(review): slope/strength semantics assumed from parameter names —
# confirm against generate_fake_spikes' own documentation.
df, df_monster, index_cols, firing_rates = generate_fake_spikes(n_trials=20,
                                                                n_neurons=6,
                                                                n_mice=3,
                                                                dur=5,
                                                                mouse_response_slope=40,
                                                                overall_stim_response_strength=5)

Exploratory plot without any fitting

Three mice, six neurons each. Mouse #0/neuron #4 has the least effect, mouse #2/neuron #0 the most

# Build the suite of exploratory charts (no model fitting involved).
charts = fake_spikes_explore(df=df, df_monster=df_monster, index_cols=index_cols)
# Display each chart; the trailing ';' suppresses notebook echo of the list.
[chart.display() for chart in charts];
#fig_mice, fig_select, fig_neurons, fig_trials, fig_isi + fig_overlay, bar, box, fig_raster, bar_combined

Estimate with neuron as condition

ISI

# Log-transform ISI. NOTE(review): 'log_isi' is computed but the plots below
# still use the raw 'isi' column — confirm whether that is intended.
df['log_isi'] = np.log10(df['isi'])
# Per-trial ISI on the dense "monster" data, one facet column per mouse.
bw = BayesWindow(df_monster, y='isi', treatment='stim', condition=['neuron', 'mouse'], group='mouse')
bw.plot(x='neuron', color='stim', detail='i_trial', add_box=False).facet(column='mouse', )
# Boxplots on the summary data, faceted by mouse (rows) and stim (columns).
bw = BayesWindow(df=df, y='isi', treatment='stim', condition=['neuron', 'mouse'], group='mouse')
bw.plot(x='neuron', add_box=True).facet(row='mouse', column='stim')

Vanilla regression

# Hierarchical regression of ISI on stim with one slope per neuron-mouse pair.
bw = BayesRegression(df=df, y='isi', treatment='stim', condition=['neuron_x_mouse'], group='mouse', detail='i_trial')
bw.fit(model=(models.model_hierarchical),
       do_make_change='divide',  # express the stim effect as a ratio (division) rather than a difference
       dist_y='normal',
       )

bw.chart
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
/tmp/ipykernel_7883/2811727381.py in <module>
      2 bw.fit(model=(models.model_hierarchical),
      3        do_make_change='divide',
----> 4        dist_y='normal',
      5        )
      6 

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/bayes_window/slopes.py in fit(self, model, do_make_change, fold_change_index_cols, do_mean_over_trials, fit_method, add_condition_slope, **kwargs)
     76                                 model=model,
     77                                 add_condition_slope=add_condition_slope,
---> 78                                 **kwargs)
     79         df_data = self.window.data.copy()
     80         if do_mean_over_trials:

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/bayes_window/fitting.py in fit_numpyro(progress_bar, model, num_warmup, n_draws, num_chains, convert_to_arviz, sampler, use_gpu, **kwargs)
     52                 chain_method='parallel'
     53                 )
---> 54     mcmc.run(jax.random.PRNGKey(16), **kwargs)
     55 
     56     # arviz convert

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/numpyro/infer/mcmc.py in run(self, rng_key, extra_fields, init_params, *args, **kwargs)
    574         else:
    575             if self.chain_method == "sequential":
--> 576                 states, last_state = _laxmap(partial_map_fn, map_args)
    577             elif self.chain_method == "parallel":
    578                 states, last_state = pmap(partial_map_fn)(map_args)

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/numpyro/infer/mcmc.py in _laxmap(f, xs)
    160     for i in range(n):
    161         x = jit(_get_value_from_index)(xs, i)
--> 162         ys.append(f(x))
    163 
    164     return tree_multimap(lambda *args: jnp.stack(args), *ys)

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/numpyro/infer/mcmc.py in _single_chain_mcmc(self, init, args, kwargs, collect_fields)
    363                 init_params,
    364                 model_args=args,
--> 365                 model_kwargs=kwargs,
    366             )
    367         sample_fn, postprocess_fn = self._get_cached_fns()

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/numpyro/infer/hmc.py in init(self, rng_key, num_warmup, init_params, model_args, model_kwargs)
    733         )
    734         if rng_key.ndim == 1:
--> 735             init_state = hmc_init_fn(init_params, rng_key)
    736         else:
    737             # XXX it is safe to run hmc_init_fn under vmap despite that hmc_init_fn changes some

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/numpyro/infer/hmc.py in <lambda>(init_params, rng_key)
    730             model_args=model_args,
    731             model_kwargs=model_kwargs,
--> 732             rng_key=rng_key,
    733         )
    734         if rng_key.ndim == 1:

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/numpyro/infer/hmc.py in init_kernel(init_params, num_warmup, step_size, inverse_mass_matrix, adapt_step_size, adapt_mass_matrix, dense_mass, target_accept_prob, trajectory_length, max_tree_depth, find_heuristic_step_size, forward_mode_differentiation, regularize_mass_matrix, model_args, model_kwargs, rng_key)
    311         z_info = IntegratorState(z=z, potential_energy=pe, z_grad=z_grad)
    312         wa_state = wa_init(
--> 313             z_info, rng_key_wa, step_size, inverse_mass_matrix=inverse_mass_matrix
    314         )
    315         r = momentum_generator(z, wa_state.mass_matrix_sqrt, rng_key_momentum)

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/numpyro/infer/hmc_util.py in init_fn(z_info, rng_key, step_size, inverse_mass_matrix, mass_matrix_size)
    567         if adapt_step_size:
    568             step_size = find_reasonable_step_size(
--> 569                 step_size, inverse_mass_matrix, z_info, rng_key_ss
    570             )
    571         ss_state = ss_init(jnp.log(10 * step_size))

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/numpyro/infer/hmc_util.py in find_reasonable_step_size(potential_fn, kinetic_fn, momentum_generator, init_step_size, inverse_mass_matrix, z_info, rng_key)
    376         )
    377 
--> 378     step_size, _, _, _ = while_loop(_cond_fn, _body_fn, (init_step_size, 0, 0, rng_key))
    379     return step_size
    380 

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/numpyro/util.py in while_loop(cond_fun, body_fun, init_val)
    127         return val
    128     else:
--> 129         return lax.while_loop(cond_fun, body_fun, init_val)
    130 
    131 

    [... skipping hidden 1 frame]

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/_src/lax/control_flow.py in while_loop(cond_fun, body_fun, init_val)
    317   outs = while_p.bind(*cond_consts, *body_consts, *init_vals,
    318                       cond_nconsts=len(cond_consts), cond_jaxpr=cond_jaxpr,
--> 319                       body_nconsts=len(body_consts), body_jaxpr=body_jaxpr)
    320   return tree_unflatten(body_tree, outs)
    321 

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/core.py in bind(self, *args, **params)
    270         args, used_axis_names(self, params) if self._dispatch_on_params else None)
    271     tracers = map(top_trace.full_raise, args)
--> 272     out = top_trace.process_primitive(self, tracers, params)
    273     return map(full_lower, out) if self.multiple_results else full_lower(out)
    274 

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/core.py in process_primitive(self, primitive, tracers, params)
    622 
    623   def process_primitive(self, primitive, tracers, params):
--> 624     return primitive.impl(*tracers, **params)
    625 
    626   def process_call(self, primitive, f, tracers, params):

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/interpreters/xla.py in apply_primitive(prim, *args, **params)
    415   """Impl rule that compiles and runs a single primitive 'prim' using XLA."""
    416   compiled_fun = xla_primitive_callable(prim, *unsafe_map(arg_spec, args),
--> 417                                         **params)
    418   return compiled_fun(*args)
    419 

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/_src/util.py in wrapper(*args, **kwargs)
    185         return f(*args, **kwargs)
    186       else:
--> 187         return cached(config._trace_context(), *args, **kwargs)
    188 
    189     wrapper.cache_clear = cached.cache_clear

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/_src/util.py in cached(_, *args, **kwargs)
    178     @functools.lru_cache(max_size)
    179     def cached(_, *args, **kwargs):
--> 180       return f(*args, **kwargs)
    181 
    182     @functools.wraps(f)

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/interpreters/xla.py in xla_primitive_callable(prim, *arg_specs, **params)
    438       return out,
    439   compiled = _xla_callable_uncached(lu.wrap_init(prim_fun), device, None,
--> 440                                     prim.name, donated_invars, *arg_specs)
    441   if not prim.multiple_results:
    442     return lambda *args, **kw: compiled(*args, **kw)[0]

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/interpreters/xla.py in _xla_callable_uncached(fun, device, backend, name, donated_invars, *arg_specs)
    758                            donated_invars, *arg_specs):
    759   return lower_xla_callable(fun, device, backend, name, donated_invars,
--> 760                             *arg_specs).compile().unsafe_call
    761 
    762 _xla_callable = lu.cache(_xla_callable_uncached)

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/interpreters/xla.py in lower_xla_callable(fun, device, backend, name, donated_invars, *arg_specs)
    827   ctx = TranslationContext(c, platform, AxisEnv(nreps, (), ()),
    828                            extend_name_stack(wrap_name(name, 'jit')))
--> 829   out_nodes = jaxpr_subcomp(ctx, jaxpr, xla_consts, *xla_args)
    830   backend = xb.get_backend(backend)
    831   # There is a non-zero cost to building an output tuple, particularly on TPU.

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/interpreters/xla.py in jaxpr_subcomp(ctx, jaxpr, consts, *args)
    576     with source_info_util.user_context(eqn.source_info):
    577       ans = rule(ctx, map(aval, eqn.invars), map(aval, eqn.outvars),
--> 578                  *in_nodes, **eqn.params)
    579 
    580     assert isinstance(ans, collections.abc.Sequence), (ans, eqn)

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/_src/lax/control_flow.py in _while_loop_translation_rule(ctx, avals_in, avals_out, cond_jaxpr, body_jaxpr, cond_nconsts, body_nconsts, *args)
    362       body_ctx, body_jaxpr.jaxpr,
    363       _map(partial(xla.pyval_to_ir_constant, body_c), body_jaxpr.consts),
--> 364       *(y + z))
    365   if batched:
    366     body_pred_ctx = body_ctx.replace(

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/interpreters/xla.py in jaxpr_subcomp(ctx, jaxpr, consts, *args)
    562     op_metadata = make_op_metadata(
    563         eqn.primitive, eqn.params, name_stack=ctx.name_stack,
--> 564         source_info=eqn.source_info)
    565     ctx.builder.set_op_metadata(op_metadata)
    566     in_nodes = _flatmap(read, eqn.invars)

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/interpreters/xla.py in make_op_metadata(primitive, params, name_stack, source_info)
    103                      ) -> xc.OpMetadata:
    104   eqn_str = str(pp.text(name_stack) +
--> 105                 pp_eqn_compact(primitive.name, params, JaxprPpContext()))
    106   tracebacks[eqn_str] = source_info
    107   frame = source_info_util.user_frame(source_info) if source_info else None

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/_src/pretty_printer.py in __str__(self)
     46 
     47   def __str__(self):
---> 48     return self.format()
     49 
     50   def __add__(self, other: 'Doc') -> 'Doc':

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/_src/pretty_printer.py in format(self, width, use_color, annotation_prefix)
     43              annotation_prefix=" # ") -> str:
     44     return _format(self, width, use_color=use_color,
---> 45                    annotation_prefix=annotation_prefix)
     46 
     47   def __str__(self):

/opt/hostedtoolcache/Python/3.7.12/x64/lib/python3.7/site-packages/jax/_src/pretty_printer.py in _format(doc, width, use_color, annotation_prefix)
    263   while len(agenda) > 0:
    264     i, m, doc, color = agenda.pop()
--> 265     if isinstance(doc, _NilDoc):
    266       pass
    267     elif isinstance(doc, _TextDoc):

KeyboardInterrupt: 

GLM

(\(y \sim \mathrm{Gamma}(\theta)\))

# Gamma GLM: fit the same hierarchical model with a Gamma likelihood,
# with both a per-group intercept and a per-group slope.
bw = BayesRegression(df=df, y='isi', treatment='stim', condition=['neuron', 'mouse'], group='mouse', detail='i_trial')
bw.fit(model=models.model_hierarchical,
       do_make_change='subtract',  # stim effect as a difference (post - pre)
       dist_y='gamma',
       add_group_intercept=True,
       add_group_slope=True,
       fold_change_index_cols=('stim', 'mouse', 'neuron', 'neuron_x_mouse', 'i_trial'))

# Combined chart across mice, then one facet per mouse.
bw.plot(x='neuron', color='mouse', independent_axes=True, finalize=True)

bw.facet(column='mouse', width=200, height=200).display()
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-1-b0507f085035> in <module>
----> 1 bw = BayesRegression(df=df, y='isi', treatment='stim', condition=['neuron', 'mouse'], group='mouse', detail='i_trial')
      2 bw.fit(model=models.model_hierarchical,
      3        do_make_change='subtract',
      4        dist_y='gamma',
      5        add_group_intercept=True,

NameError: name 'BayesRegression' is not defined
import altair as alt

# Posterior-mean slope per mouse, averaged over MCMC chains and draws.
slopes = bw.trace.posterior['slope_per_group'].mean(['chain', 'draw']).to_dataframe().reset_index()
chart_slopes = alt.Chart(slopes).mark_bar().encode(
    x=alt.X('mouse_:O', title='Mouse'),
    y=alt.Y('slope_per_group', title='Slope')
)
chart_slopes
../_images/quickstart_15_0.png
# Linear mixed-effects model as a frequentist baseline on firing rate.
bw = LMERegression(df=df, y='firing_rate', treatment='stim', condition='neuron_x_mouse', group='mouse', )
#bw.fit_anova()
bw.fit()
Using formula firing_rate ~ (1|mouse) + stim| neuron_x_mouse__0 + stim|neuron_x_mouse__1 + stim|neuron_x_mouse__2 + stim|neuron_x_mouse__3 + stim|neuron_x_mouse__4 + stim|neuron_x_mouse__5 + stim|neuron_x_mouse__6 + stim|neuron_x_mouse__7 + stim|neuron_x_mouse__8 + stim|neuron_x_mouse__9 + stim|neuron_x_mouse__10 + stim|neuron_x_mouse__11 + stim|neuron_x_mouse__12 + stim|neuron_x_mouse__13 + stim|neuron_x_mouse__14 + stim|neuron_x_mouse__15 + stim|neuron_x_mouse__16 + stim|neuron_x_mouse__17
                              Coef. Std.Err.       z  P>|z|    [0.025   0.975]
Intercept                  -186.613  129.048  -1.446  0.148  -439.543   66.316
1 | mouse                   192.950   67.060   2.877  0.004    61.514  324.385
stim | neuron_x_mouse__0    107.321  160.353   0.669  0.503  -206.965  421.607
stim | neuron_x_mouse__1    163.943  160.353   1.022  0.307  -150.344  478.229
stim | neuron_x_mouse__2    241.525  160.353   1.506  0.132   -72.762  555.811
stim | neuron_x_mouse__3    175.713  160.353   1.096  0.273  -138.574  489.999
stim | neuron_x_mouse__4    172.324  160.353   1.075  0.283  -141.962  486.610
stim | neuron_x_mouse__5    302.769  160.353   1.888  0.059   -11.518  617.055
stim | neuron_x_mouse__6     44.522  160.353   0.278  0.781  -269.764  358.808
stim | neuron_x_mouse__7    102.772  160.353   0.641  0.522  -211.515  417.058
stim | neuron_x_mouse__8     54.069  160.353   0.337  0.736  -260.217  368.356
stim | neuron_x_mouse__9     75.829  160.353   0.473  0.636  -238.458  390.115
stim | neuron_x_mouse__10    95.533  160.353   0.596  0.551  -218.754  409.819
stim | neuron_x_mouse__11    73.702  160.353   0.460  0.646  -240.584  387.988
stim | neuron_x_mouse__12  -327.977  158.297  -2.072  0.038  -638.233  -17.720
stim | neuron_x_mouse__13  -281.101  158.297  -1.776  0.076  -591.357   29.155
stim | neuron_x_mouse__14  -302.605  158.297  -1.912  0.056  -612.861    7.652
stim | neuron_x_mouse__15    17.929  158.297   0.113  0.910  -292.327  328.186
stim | neuron_x_mouse__16  -285.587  158.297  -1.804  0.071  -595.843   24.670
stim | neuron_x_mouse__17  -224.729  158.297  -1.420  0.156  -534.985   85.527
Group Var                  8037.054   24.656                                  
Please use window.regression_charts(): mouse is not in Index(['neuron_x_mouse', 'center interval', 'Std.Err.', 'z', 'p',
       'higher interval', 'lower interval', 'zero'],
      dtype='object')
<bayes_window.workflow.BayesWindow at 0x7fc9e2766280>
# Plot LME coefficients per neuron-mouse combination (ordinal axis).
bw.plot(x='neuron_x_mouse:O')
../_images/quickstart_17_0.png

Firing rate

# Firing-rate model with a Student-t likelihood (robust to outliers),
# group slopes only (no group intercept).
bw = BayesRegression(df=df, y='firing_rate', treatment='stim', condition='neuron_x_mouse', group='mouse')
bw.fit(model=models.model_hierarchical, do_make_change='subtract',
       progress_bar=False,
       dist_y='student',
       add_group_slope=True, add_group_intercept=False,
       fold_change_index_cols=('stim', 'mouse', 'neuron', 'neuron_x_mouse'))

# Combined chart across mice, then one facet per mouse.
bw.plot(x='neuron', color='mouse', independent_axes=True, finalize=True)
bw.facet(column='mouse', width=200, height=200).display()
../_images/quickstart_19_1.png

ANOVA may not be appropriate here: It considers every neuron. If we look hard enough, surely we’ll find a responsive neuron or two out of hundreds?

# Classic ANOVA via an explicit formula, including stim interactions
# with both mouse and neuron_x_mouse.
bw = LMERegression(df=df, y='firing_rate', treatment='stim', condition='neuron_x_mouse', group='mouse')

bw.fit(formula='firing_rate ~ stim+ mouse + stim*mouse + neuron_x_mouse + stim * neuron_x_mouse');
firing_rate ~ stim+ mouse + stim*mouse + neuron_x_mouse + stim * neuron_x_mouse
                         sum_sq   df       F  PR(>F)
stim                     -0.00  1.0   -0.00    1.00
mouse                  6491.97  1.0    3.07    0.22
stim:mouse             7014.97  1.0    3.32    0.21
neuron_x_mouse       255069.09  1.0  120.81    0.01
stim:neuron_x_mouse   99777.33  1.0   47.26    0.02
Residual               4222.48  2.0     NaN     NaN

Model quality

# Vanilla robust (Student-t): no group intercept or group slope.
bw = BayesRegression(df=df, y='isi', treatment='stim', condition=['neuron', 'mouse'], group='mouse')
bw.fit(model=(models.model_hierarchical),
       do_make_change='subtract',
       dist_y='student',
       robust_slopes=True,
       add_group_intercept=False,
       add_group_slope=False,
       fold_change_index_cols=('stim', 'mouse', 'neuron', 'neuron_x_mouse'))

bw.plot_model_quality()
../_images/quickstart_23_1.png ../_images/quickstart_23_2.png
# Vanilla robust (Student-t): group intercept only.
bw = BayesRegression(df=df, y='isi', treatment='stim', condition=['neuron', 'mouse'], group='mouse')
bw.fit(model=(models.model_hierarchical),
       do_make_change='subtract',
       dist_y='student',
       robust_slopes=True,
       add_group_intercept=True,
       add_group_slope=False,
       fold_change_index_cols=('stim', 'mouse', 'neuron', 'neuron_x_mouse'))

bw.plot_model_quality()
../_images/quickstart_24_1.png ../_images/quickstart_24_2.png
# Vanilla robust (Student-t): group slopes only.
bw = BayesRegression(df=df, y='isi', treatment='stim', condition=['neuron', 'mouse'], group='mouse')
bw.fit(model=(models.model_hierarchical),
       do_make_change='subtract',
       dist_y='student',
       robust_slopes=True,
       add_group_intercept=False,
       add_group_slope=True,
       fold_change_index_cols=('stim', 'mouse', 'neuron', 'neuron_x_mouse'))

bw.plot_model_quality()
../_images/quickstart_25_1.png ../_images/quickstart_25_2.png
# Vanilla robust (Student-t): both group intercept and group slope.
bw = BayesRegression(df=df, y='isi', treatment='stim', condition=['neuron', 'mouse'], group='mouse')
bw.fit(model=(models.model_hierarchical),
       do_make_change='subtract',
       dist_y='student',
       robust_slopes=True,
       add_group_intercept=True,
       add_group_slope=True,
       fold_change_index_cols=('stim', 'mouse', 'neuron', 'neuron_x_mouse'))

bw.plot_model_quality()
../_images/quickstart_26_1.png ../_images/quickstart_26_2.png
# Gamma GLM: group intercept only, non-robust slopes.
bw = BayesRegression(df=df, y='isi', treatment='stim', condition=['neuron', 'mouse'], group='mouse')
bw.fit(model=(models.model_hierarchical),
       do_make_change='subtract',
       dist_y='gamma',
       robust_slopes=False,
       add_group_intercept=True,
       add_group_slope=False,
       fold_change_index_cols=('stim', 'mouse', 'neuron', 'neuron_x_mouse'))

bw.plot_model_quality()
n(Divergences) = 4
../_images/quickstart_27_2.png ../_images/quickstart_27_3.png

Combining group slopes with group intercepts leads to divergences.

LME fails

# LME with group intercept only. add_data=False presumably skips attaching
# the raw data to the result (see the fit_lme() note further below) — confirm.
bw = LMERegression(df=df, y='isi', treatment='stim', condition=['neuron_x_mouse'], group='mouse', )
bw.fit(add_data=False, add_group_intercept=True, add_group_slope=False)
Using formula isi ~ (1|mouse) + stim| neuron_x_mouse__0 + stim|neuron_x_mouse__1 + stim|neuron_x_mouse__2 + stim|neuron_x_mouse__3 + stim|neuron_x_mouse__4 + stim|neuron_x_mouse__5 + stim|neuron_x_mouse__6 + stim|neuron_x_mouse__7 + stim|neuron_x_mouse__8 + stim|neuron_x_mouse__9 + stim|neuron_x_mouse__10 + stim|neuron_x_mouse__11 + stim|neuron_x_mouse__12 + stim|neuron_x_mouse__13 + stim|neuron_x_mouse__14 + stim|neuron_x_mouse__15 + stim|neuron_x_mouse__16 + stim|neuron_x_mouse__17
                            Coef. Std.Err.        z  P>|z|  [0.025  0.975]
Intercept                   0.079    0.014    5.802  0.000   0.053   0.106
1 | mouse                  -0.007    0.007   -0.941  0.347  -0.021   0.007
stim | neuron_x_mouse__0    0.007    0.003    2.597  0.009   0.002   0.012
stim | neuron_x_mouse__1   -0.004    0.003   -1.715  0.086  -0.009   0.001
stim | neuron_x_mouse__2   -0.012    0.003   -4.695  0.000  -0.017  -0.007
stim | neuron_x_mouse__3   -0.015    0.003   -5.781  0.000  -0.020  -0.010
stim | neuron_x_mouse__4   -0.025    0.003   -9.926  0.000  -0.030  -0.020
stim | neuron_x_mouse__5   -0.032    0.003  -12.456  0.000  -0.037  -0.027
stim | neuron_x_mouse__6    0.025    0.003    9.747  0.000   0.020   0.030
stim | neuron_x_mouse__7    0.015    0.003    5.804  0.000   0.010   0.020
stim | neuron_x_mouse__8    0.005    0.003    1.934  0.053  -0.000   0.010
stim | neuron_x_mouse__9   -0.003    0.003   -1.357  0.175  -0.008   0.002
stim | neuron_x_mouse__10  -0.010    0.003   -3.860  0.000  -0.015  -0.005
stim | neuron_x_mouse__11  -0.012    0.003   -4.831  0.000  -0.017  -0.007
stim | neuron_x_mouse__12   0.025    0.003    9.806  0.000   0.020   0.030
stim | neuron_x_mouse__13   0.019    0.003    7.260  0.000   0.014   0.024
stim | neuron_x_mouse__14   0.005    0.003    2.146  0.032   0.000   0.010
stim | neuron_x_mouse__15   0.002    0.003    0.747  0.455  -0.003   0.007
stim | neuron_x_mouse__16  -0.001    0.003   -0.443  0.658  -0.006   0.004
stim | neuron_x_mouse__17  -0.009    0.003   -3.518  0.000  -0.014  -0.004
Group Var                   0.000    0.017                                
Please use window.regression_charts(): mouse is not in Index(['neuron_x_mouse', 'center interval', 'Std.Err.', 'z', 'p',
       'higher interval', 'lower interval', 'zero'],
      dtype='object')
<bayes_window.workflow.BayesWindow at 0x7fc9b83bbac0>
bw.chart.display()
# Faceting is disabled here because fit_lme() was run with add_data=False.
#bw.facet(column='mouse').display()
"Proper faceting will work when data addition is implemented in fit_lme()"
../_images/quickstart_31_0.png
'Proper faceting will work when data addition is implemented in fit_lme()'
# LME with both a group intercept and a group slope (stim|mouse in the formula).
bw = LMERegression(df=df, y='isi', treatment='stim', condition=['neuron_x_mouse'], group='mouse', )
bw.fit(add_data=False, add_group_intercept=True, add_group_slope=True)
Using formula isi ~ (stim|mouse)  + stim| neuron_x_mouse__0 + stim|neuron_x_mouse__1 + stim|neuron_x_mouse__2 + stim|neuron_x_mouse__3 + stim|neuron_x_mouse__4 + stim|neuron_x_mouse__5 + stim|neuron_x_mouse__6 + stim|neuron_x_mouse__7 + stim|neuron_x_mouse__8 + stim|neuron_x_mouse__9 + stim|neuron_x_mouse__10 + stim|neuron_x_mouse__11 + stim|neuron_x_mouse__12 + stim|neuron_x_mouse__13 + stim|neuron_x_mouse__14 + stim|neuron_x_mouse__15 + stim|neuron_x_mouse__16 + stim|neuron_x_mouse__17
                            Coef. Std.Err.        z  P>|z|  [0.025  0.975]
Intercept                   0.075    0.010    7.760  0.000   0.056   0.094
stim | mouse               -0.007    0.007   -0.941  0.347  -0.021   0.007
stim | neuron_x_mouse__0    0.005    0.003    1.396  0.163  -0.002   0.011
stim | neuron_x_mouse__1   -0.006    0.003   -1.908  0.056  -0.013   0.000
stim | neuron_x_mouse__2   -0.014    0.003   -4.192  0.000  -0.020  -0.007
stim | neuron_x_mouse__3   -0.017    0.003   -5.025  0.000  -0.023  -0.010
stim | neuron_x_mouse__4   -0.027    0.003   -8.202  0.000  -0.034  -0.021
stim | neuron_x_mouse__5   -0.034    0.003  -10.141  0.000  -0.040  -0.027
stim | neuron_x_mouse__6    0.030    0.006    5.268  0.000   0.019   0.041
stim | neuron_x_mouse__7    0.020    0.006    3.478  0.001   0.009   0.031
stim | neuron_x_mouse__8    0.010    0.006    1.722  0.085  -0.001   0.021
stim | neuron_x_mouse__9    0.001    0.006    0.229  0.819  -0.010   0.012
stim | neuron_x_mouse__10  -0.005    0.006   -0.907  0.364  -0.016   0.006
stim | neuron_x_mouse__11  -0.008    0.006   -1.348  0.178  -0.019   0.003
stim | neuron_x_mouse__12   0.023    0.003    7.067  0.000   0.017   0.029
stim | neuron_x_mouse__13   0.017    0.003    5.075  0.000   0.010   0.023
stim | neuron_x_mouse__14   0.003    0.003    1.073  0.283  -0.003   0.010
stim | neuron_x_mouse__15  -0.000    0.003   -0.022  0.982  -0.006   0.006
stim | neuron_x_mouse__16  -0.003    0.003   -0.953  0.341  -0.009   0.003
stim | neuron_x_mouse__17  -0.011    0.003   -3.359  0.001  -0.017  -0.005
Group Var                   0.000    0.017                                
Please use window.regression_charts(): mouse is not in Index(['neuron_x_mouse', 'center interval', 'Std.Err.', 'z', 'p',
       'higher interval', 'lower interval', 'zero'],
      dtype='object')
<bayes_window.workflow.BayesWindow at 0x7fc9b8204880>
# Display the coefficient chart for the intercept+slope LME fit.
bw.chart
../_images/quickstart_33_0.png

Need nested design, but get singular matrix:

# Nested design (neuron_x_mouse within mouse): this currently fails with a
# singular matrix, so the error is caught and printed instead of raised.
bw = LMERegression(df=df, y='isi', treatment='stim', condition=['neuron_x_mouse'], group='mouse', )
try:
    bw.fit(add_data=False, add_group_intercept=True, add_group_slope=True, add_nested_group=True)
except Exception as e:
    print(e)
Using formula isi ~ (stim|mouse) + stim| neuron_x_mouse__0:mouse + stim|neuron_x_mouse__1:mouse + stim|neuron_x_mouse__2:mouse + stim|neuron_x_mouse__3:mouse + stim|neuron_x_mouse__4:mouse + stim|neuron_x_mouse__5:mouse + stim|neuron_x_mouse__6:mouse + stim|neuron_x_mouse__7:mouse + stim|neuron_x_mouse__8:mouse + stim|neuron_x_mouse__9:mouse + stim|neuron_x_mouse__10:mouse + stim|neuron_x_mouse__11:mouse + stim|neuron_x_mouse__12:mouse + stim|neuron_x_mouse__13:mouse + stim|neuron_x_mouse__14:mouse + stim|neuron_x_mouse__15:mouse + stim|neuron_x_mouse__16:mouse + stim|neuron_x_mouse__17:mouse
Singular matrix